mapchete-eo 2025.7.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -0
- mapchete_eo/archives/__init__.py +0 -0
- mapchete_eo/archives/base.py +65 -0
- mapchete_eo/array/__init__.py +0 -0
- mapchete_eo/array/buffer.py +16 -0
- mapchete_eo/array/color.py +29 -0
- mapchete_eo/array/convert.py +157 -0
- mapchete_eo/base.py +528 -0
- mapchete_eo/blacklist.txt +175 -0
- mapchete_eo/cli/__init__.py +30 -0
- mapchete_eo/cli/bounds.py +22 -0
- mapchete_eo/cli/options_arguments.py +243 -0
- mapchete_eo/cli/s2_brdf.py +77 -0
- mapchete_eo/cli/s2_cat_results.py +146 -0
- mapchete_eo/cli/s2_find_broken_products.py +93 -0
- mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
- mapchete_eo/cli/s2_mask.py +71 -0
- mapchete_eo/cli/s2_mgrs.py +45 -0
- mapchete_eo/cli/s2_rgb.py +114 -0
- mapchete_eo/cli/s2_verify.py +129 -0
- mapchete_eo/cli/static_catalog.py +123 -0
- mapchete_eo/eostac.py +30 -0
- mapchete_eo/exceptions.py +87 -0
- mapchete_eo/geometry.py +271 -0
- mapchete_eo/image_operations/__init__.py +12 -0
- mapchete_eo/image_operations/color_correction.py +136 -0
- mapchete_eo/image_operations/compositing.py +247 -0
- mapchete_eo/image_operations/dtype_scale.py +43 -0
- mapchete_eo/image_operations/fillnodata.py +130 -0
- mapchete_eo/image_operations/filters.py +319 -0
- mapchete_eo/image_operations/linear_normalization.py +81 -0
- mapchete_eo/image_operations/sigmoidal.py +114 -0
- mapchete_eo/io/__init__.py +37 -0
- mapchete_eo/io/assets.py +492 -0
- mapchete_eo/io/items.py +147 -0
- mapchete_eo/io/levelled_cubes.py +228 -0
- mapchete_eo/io/path.py +144 -0
- mapchete_eo/io/products.py +413 -0
- mapchete_eo/io/profiles.py +45 -0
- mapchete_eo/known_catalogs.py +42 -0
- mapchete_eo/platforms/sentinel2/__init__.py +17 -0
- mapchete_eo/platforms/sentinel2/archives.py +190 -0
- mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
- mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
- mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
- mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
- mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
- mapchete_eo/platforms/sentinel2/config.py +181 -0
- mapchete_eo/platforms/sentinel2/driver.py +78 -0
- mapchete_eo/platforms/sentinel2/masks.py +325 -0
- mapchete_eo/platforms/sentinel2/metadata_parser.py +734 -0
- mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +29 -0
- mapchete_eo/platforms/sentinel2/path_mappers/base.py +56 -0
- mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +34 -0
- mapchete_eo/platforms/sentinel2/path_mappers/metadata_xml.py +135 -0
- mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +105 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +26 -0
- mapchete_eo/platforms/sentinel2/processing_baseline.py +160 -0
- mapchete_eo/platforms/sentinel2/product.py +669 -0
- mapchete_eo/platforms/sentinel2/types.py +109 -0
- mapchete_eo/processes/__init__.py +0 -0
- mapchete_eo/processes/config.py +51 -0
- mapchete_eo/processes/dtype_scale.py +112 -0
- mapchete_eo/processes/eo_to_xarray.py +19 -0
- mapchete_eo/processes/merge_rasters.py +235 -0
- mapchete_eo/product.py +278 -0
- mapchete_eo/protocols.py +56 -0
- mapchete_eo/search/__init__.py +14 -0
- mapchete_eo/search/base.py +222 -0
- mapchete_eo/search/config.py +42 -0
- mapchete_eo/search/s2_mgrs.py +314 -0
- mapchete_eo/search/stac_search.py +251 -0
- mapchete_eo/search/stac_static.py +236 -0
- mapchete_eo/search/utm_search.py +251 -0
- mapchete_eo/settings.py +24 -0
- mapchete_eo/sort.py +48 -0
- mapchete_eo/time.py +53 -0
- mapchete_eo/types.py +73 -0
- mapchete_eo-2025.7.0.dist-info/METADATA +38 -0
- mapchete_eo-2025.7.0.dist-info/RECORD +87 -0
- mapchete_eo-2025.7.0.dist-info/WHEEL +5 -0
- mapchete_eo-2025.7.0.dist-info/entry_points.txt +11 -0
- mapchete_eo-2025.7.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Generator, Iterator, List, Optional, Sequence, Union
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
import numpy.ma as ma
|
|
7
|
+
import xarray as xr
|
|
8
|
+
from mapchete.protocols import GridProtocol
|
|
9
|
+
from mapchete.types import NodataVals
|
|
10
|
+
|
|
11
|
+
from mapchete_eo.io.products import Slice
|
|
12
|
+
from mapchete_eo.array.convert import to_dataarray, to_masked_array
|
|
13
|
+
from mapchete_eo.exceptions import NoSourceProducts
|
|
14
|
+
from mapchete_eo.platforms.sentinel2.product import S2Product
|
|
15
|
+
from mapchete_eo.platforms.sentinel2.types import Resolution
|
|
16
|
+
from mapchete_eo.sort import SortMethodConfig
|
|
17
|
+
from mapchete_eo.types import MergeMethod
|
|
18
|
+
from mapchete_eo.exceptions import (
|
|
19
|
+
AssetKeyError,
|
|
20
|
+
CorruptedProduct,
|
|
21
|
+
CorruptedSlice,
|
|
22
|
+
EmptySliceException,
|
|
23
|
+
EmptyStackException,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
from mapchete_eo.protocols import EOProductProtocol
|
|
27
|
+
|
|
28
|
+
logger = logging.getLogger(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def read_masks(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    nodatavals: NodataVals = None,
    product_read_kwargs: Optional[dict] = None,
) -> ma.MaskedArray:
    """
    Read a grid window of product masks and stack them into one masked array.

    Parameters
    ----------
    products : list of S2Product
        Source products whose masks are read.
    grid : GridProtocol, optional
        Target grid passed through to the mask generator.
    nodatavals : NodataVals, optional
        Nodata value(s) forwarded to the mask generator.
    product_read_kwargs : dict, optional
        Extra keyword arguments passed through to the product mask readers.
        Defaults to an empty dict (a ``None`` sentinel is used here to avoid
        the shared-mutable-default pitfall).

    Returns
    -------
    ma.MaskedArray
        All product masks stacked along a new leading axis.
    """
    return ma.stack(
        [
            to_masked_array(mask)
            for mask in generate_masks(
                products=products,
                grid=grid,
                nodatavals=nodatavals,
                product_read_kwargs=product_read_kwargs or {},
            )
        ]
    )
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def masks_to_xarray(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    slice_axis_name: str = "time",
    band_axis_name: str = "bands",
    x_axis_name: str = "x",
    y_axis_name: str = "y",
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    raise_empty: bool = True,
    product_read_kwargs: Optional[dict] = None,
) -> xr.Dataset:
    """
    Read a grid window of product masks and merge them into a 4D xarray Dataset.

    Each slice (one product, or one group of products merged by
    ``merge_products_by``) becomes one data variable; the Dataset is
    transposed to (slice, band, x, y) axis order.

    Parameters
    ----------
    products : list of S2Product
        Source products whose masks are read.
    grid : GridProtocol, optional
        Target grid for the read window.
    slice_axis_name, band_axis_name, x_axis_name, y_axis_name : str
        Axis names used in the resulting Dataset.
    merge_products_by : str, optional
        Item property to group products by before merging.
    merge_method : MergeMethod
        How grouped product masks are merged (see merge_products_masks).
    sort : SortMethodConfig, optional
        Optional slice sorting configuration.
    raise_empty : bool
        Raise when a merged slice is entirely empty.
    product_read_kwargs : dict, optional
        Extra keyword arguments passed to the product mask readers
        (``None`` sentinel avoids a mutable default argument).

    Returns
    -------
    xr.Dataset
    """
    data_vars = list(
        generate_slice_masks_dataarrays(
            products=products,
            grid=grid,
            merge_products_by=merge_products_by,
            merge_method=merge_method,
            sort=sort,
            product_read_kwargs=product_read_kwargs or {},
            raise_empty=raise_empty,
        )
    )
    if merge_products_by and merge_products_by not in ["date", "datetime"]:
        # use the grouping property values as the slice coordinate
        coords = {merge_products_by: [s.name for s in data_vars]}
        slice_axis_name = merge_products_by
    else:
        # NOTE(review): this builds one timestamp per *product*; if products
        # were merged into fewer slices the coordinate length may not match
        # the number of data variables — confirm against callers.
        coords = {
            slice_axis_name: list(
                np.array(
                    [product.item.datetime for product in products], dtype=np.datetime64
                )
            )
        }

    return xr.Dataset(
        data_vars={s.name: s for s in data_vars},
        coords=coords,
    ).transpose(slice_axis_name, band_axis_name, x_axis_name, y_axis_name)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def generate_masks(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    nodatavals: NodataVals = None,
    product_read_kwargs: Optional[dict] = None,
) -> Iterator[xr.DataArray]:
    """
    Yield masks of slices or products into a cube to support advanced cube
    handling as DataArrays.

    This should be analog to other read functions and can be used for read
    functions and read_levelled_cube as input if needed.

    Parameters
    ----------
    products : list of S2Product
        Source products whose masks are read.
    grid : GridProtocol or Resolution, optional
        Target grid; if None, the first product's 10m metadata grid is used,
        and a Resolution value is resolved via the product metadata.
    nodatavals : NodataVals, optional
        Per-band list or scalar nodata value; masks only need a single value,
        so the first list entry is used.
    product_read_kwargs : dict, optional
        Must contain a "mask_config" entry (KeyError otherwise); ``None``
        sentinel avoids a mutable default argument.

    Raises
    ------
    NoSourceProducts
        If no products were given.

    TODO apply these masks on existing cubes (np_arrays, xarrays)
    """
    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    logger.debug("reading %s product masks", len(products))

    # NodataVals may be a per-band list or a scalar; masks only need one
    # value, so collapse the original redundant branches into one expression.
    nodataval = nodatavals[0] if isinstance(nodatavals, list) else nodatavals

    # shallow copy so the caller's dict is never mutated downstream
    product_read_kwargs = dict(product_read_kwargs or {})
    for product_ in products:
        if grid is None:
            # default to the product's native 10m grid
            grid = product_.metadata.grid(Resolution["10m"])
        elif isinstance(grid, Resolution):
            # resolve a bare Resolution into a concrete grid
            grid = product_.metadata.grid(grid)
        mask_grid = product_.get_mask(
            grid=grid,
            mask_config=product_read_kwargs["mask_config"],  # required key
        )
        # prepend a band axis and mask out zero-valued pixels
        yield to_dataarray(
            ma.masked_where(mask_grid == 0, ma.expand_dims(mask_grid.data, axis=0)),
            nodataval=nodataval,
            name=product_.id,
            attrs=product_.item.properties,
        )
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def merge_products_masks(
    products: Sequence[Union[S2Product, EOProductProtocol]],
    merge_method: MergeMethod = MergeMethod.first,
    product_read_kwargs: dict = {},
    raise_empty: bool = False,
) -> ma.MaskedArray:
    """
    Merge the given products' masks into one boolean masked array.

    Analogous to merging product slices: with 'first', subsequent products
    only fill positions still masked in the running result; with 'all', every
    readable mask in the datastripe/slice is stacked and reduced with a
    minimum over the stack axis.

    Raises
    ------
    ValueError
        If merge_method is 'average' (not meaningful for masks).
    NoSourceProducts
        If no products were given.
    CorruptedSlice
        If every product's mask fails to read.
    EmptySliceException
        If raise_empty is True and the merged mask is entirely True.
    """

    # NOTE(review): comparing the enum against a plain string assumes
    # MergeMethod is a str-based enum — confirm against mapchete_eo.types.
    if merge_method == "average":
        raise ValueError(
            "Merge Method 'average' makes no sense for 'merge_products_masks' either 'first' or 'all'!"
        )

    def read_remaining_valid_products_masks(
        products_iter: Iterator[Union[S2Product, EOProductProtocol]],
        product_read_kwargs: dict,
    ) -> Generator[ma.MaskedArray, None, None]:
        """Yields and reads remaining products masks from iterator while discarding corrupt products."""
        try:
            for product in products_iter:
                try:
                    # prepend a band axis so shapes line up with the first mask
                    new = np.expand_dims(
                        product.get_mask(**product_read_kwargs).data, axis=0
                    )
                    yield ma.masked_array(data=new.astype(bool, copy=False))
                except (AssetKeyError, CorruptedProduct) as exc:
                    # best-effort: broken products are skipped, not fatal
                    logger.debug("skip product %s because of %s", product.item.id, exc)
        except StopIteration:
            return

    if len(products) == 0:  # pragma: no cover
        raise NoSourceProducts("no products to merge")

    products_iter = iter(products)

    # read first valid product
    for product in products_iter:
        try:
            out: ma.MaskedArray = ma.masked_array(
                data=np.expand_dims(
                    product.get_mask(**product_read_kwargs).data, axis=0
                ).astype(bool, copy=False)
            )
            break
        except (AssetKeyError, CorruptedProduct) as exc:
            logger.debug("skip product mask %s because of %s", product.item.id, exc)
    else:
        # we cannot do anything here, as all products are broken
        raise CorruptedSlice("all products (masks) are broken here")

    # fill in gaps sequentially, product by product
    if merge_method == MergeMethod.first:
        for new in read_remaining_valid_products_masks(
            products_iter, product_read_kwargs
        ):
            masked = out.mask
            # Update values at masked locations
            out[masked] = new[masked].astype(bool, copy=False)
            # if whole output array is filled, there is no point in reading more data
            if out.all():
                return out

    # read all masks and combine them via a minimum over the stack axis
    elif merge_method == MergeMethod.all:

        def _generate_arrays(
            first_product_array: ma.MaskedArray,
            remaining_product_arrays: Generator[ma.MaskedArray, None, None],
        ) -> Generator[ma.MaskedArray, None, None]:
            """Yield all available product arrays."""
            yield first_product_array
            yield from remaining_product_arrays

        # explicitly specify dtype to avoid casting of integer arrays to floats
        # during the reduction (numpy's default casting behavior, documented for
        # mean, would otherwise widen the result):
        # https://numpy.org/doc/stable/reference/generated/numpy.mean.html#numpy.mean
        arrays = list(
            _generate_arrays(
                out,
                read_remaining_valid_products_masks(products_iter, product_read_kwargs),
            )
        )

        # Filter out arrays that are entirely masked
        valid_arrays = [a for a in arrays if not ma.getmaskarray(a).all()]

        if valid_arrays:
            stacked = ma.stack(valid_arrays, dtype=out.dtype)
            out = stacked.min(axis=0)
        else:
            # All arrays were fully masked — return fully masked output
            out = ma.masked_all(out.shape, dtype=out.dtype)

    else:  # pragma: no cover
        raise NotImplementedError(f"unknown merge method: {merge_method}")

    if raise_empty and out.all():
        raise EmptySliceException(
            f"slice is empty after combining {len(products)} products"
        )

    return out
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def generate_slice_masks_dataarrays(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    mask_name: Optional[List[str]] = None,
    product_read_kwargs: Optional[dict] = None,
    raise_empty: bool = True,
) -> Iterator[xr.DataArray]:
    """
    Yield products or merged products into slices as DataArrays.

    Parameters
    ----------
    products : list of S2Product
        Source products whose masks are read.
    grid : GridProtocol, optional
        Target grid forwarded to merge_products_masks.
    merge_products_by : str, optional
        Item property to group products into slices by.
    merge_method : MergeMethod
        How grouped product masks are merged.
    sort : SortMethodConfig, optional
        Optional slice sorting configuration.
    mask_name : list of str, optional
        Band name(s) for the resulting DataArrays; defaults to
        ["EOxCloudless_masks"]. (``None`` sentinels here and for
        product_read_kwargs avoid mutable default arguments.)
    product_read_kwargs : dict, optional
        Extra keyword arguments passed to the product mask readers.
    raise_empty : bool
        Raise when a merged slice is entirely empty.

    Raises
    ------
    NoSourceProducts
        If no products were given.
    EmptyStackException
        If no slice could be yielded at all.
    """
    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    # resolve None sentinels to their effective defaults
    mask_name = ["EOxCloudless_masks"] if mask_name is None else mask_name
    product_read_kwargs = product_read_kwargs or {}

    stack_empty = True

    # group products into slices and sort slices if configured
    slices = product_masks_to_slices(
        products, group_by_property=merge_products_by, sort=sort
    )

    logger.debug(
        "reading %s products in %s groups...",
        len(products),
        len(slices),
    )
    # 'slice_' avoids shadowing the builtin 'slice'
    for slice_ in slices:
        try:
            # if merge_products_by is None, each slice contains just one
            # product, so nothing will have to be merged anyways
            with slice_.cached():
                yield to_dataarray(
                    merge_products_masks(
                        products=slice_.products,
                        merge_method=merge_method,
                        product_read_kwargs=dict(
                            product_read_kwargs,
                            grid=grid,
                        ),
                        raise_empty=raise_empty,
                    ),
                    name=slice_.name,
                    band_names=mask_name,
                    attrs=slice_.properties,
                )
            # if at least one slice can be yielded, the stack is not empty
            stack_empty = False
        except (EmptySliceException, CorruptedSlice):
            # skip empty/broken slices; full emptiness is handled below
            pass

    if stack_empty:
        raise EmptyStackException("all slices are empty")
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
def product_masks_to_slices(
    products: List[S2Product],
    group_by_property: Optional[str] = None,
    sort: Optional[SortMethodConfig] = None,
) -> List[Slice]:
    """Bundle products into Slice objects, optionally grouped by a property and sorted."""
    if group_by_property:
        # collect products sharing the same property value into one bucket
        buckets = defaultdict(list)
        for item in products:
            buckets[item.get_property(group_by_property)].append(item)
        slices = [Slice(value, members) for value, members in buckets.items()]
    else:
        # one slice per product, keyed by the product item id
        slices = [Slice(item.item.id, [item]) for item in products]

    # only apply sorting when there is actually something to sort
    if sort and slices:
        sort_config = sort.model_dump()
        sort_func = sort_config.pop("func")
        slices = sort_func(slices, **sort_config)

    return slices
|